f21ab10a7a978c1a470e674a3d16cd460a91a356,src/edu/stanford/nlp/sentiment/SentimentCostAndGradient.java,SentimentCostAndGradient,backpropDerivativesAndError,#Tree#TwoDimensionalMap#TwoDimensionalMap#TwoDimensionalMap#Map#Map#SimpleMatrix#,154

Before Change



    // TODO: factor this out somewhere?
    // One-hot target vector: numClasses x 1, with 1.0 at the gold-class index.
    SimpleMatrix goldLabel = new SimpleMatrix(model.numClasses, 1);
    goldLabel.set(RNNCoreAnnotations.getGoldClass(tree), 1.0);

    // Softmax class probabilities previously cached on this tree node.
    SimpleMatrix predictions = RNNCoreAnnotations.getPredictions(tree);

    // Gradient of softmax + cross-entropy w.r.t. the pre-softmax scores: (p - y).
    SimpleMatrix deltaClass = predictions.minus(goldLabel);
    // Outer product with [currentVector; 1]^T — presumably the gradient for the
    // classification weight matrix (bias folded in via concatenateWithBias).
    // NOTE(review): confirm against the consumer of localCD elsewhere in the method.
    SimpleMatrix localCD = deltaClass.mult(RNNUtils.concatenateWithBias(currentVector).transpose());

    // Cross-entropy loss for this node: -sum(y .* log(p)); only the gold-class
    // term survives the element-wise product with the one-hot goldLabel.
    double error = -(RNNUtils.elementwiseApplyLog(predictions).elementMult(goldLabel).elementSum());

After Change



    // Build a vector that looks like 0,0,1,0,0 with an indicator for the correct class
    SimpleMatrix goldLabel = new SimpleMatrix(model.numClasses, 1);
    int goldClass = RNNCoreAnnotations.getGoldClass(tree);
    goldLabel.set(goldClass, 1.0);

    // Per-class weighting for this node's gold label, taken from the model options;
    // used below to scale the gradient contribution of this node.
    double nodeWeight = model.op.getClassWeight(goldClass);

    // Softmax class probabilities previously cached on this tree node.
    SimpleMatrix predictions = RNNCoreAnnotations.getPredictions(tree);

    // Gradient of softmax + cross-entropy w.r.t. the pre-softmax scores, (p - y),
    // scaled by the gold class's weight so weighted classes contribute more/less.
    SimpleMatrix deltaClass = predictions.minus(goldLabel).scale(nodeWeight);
    // Outer product with [currentVector; 1]^T — presumably the gradient for the
    // classification weight matrix (bias folded in via concatenateWithBias).
    SimpleMatrix localCD = deltaClass.mult(RNNUtils.concatenateWithBias(currentVector).transpose());

    // Cross-entropy loss for this node: -sum(y .* log(p)).
    // NOTE(review): deltaClass is scaled by nodeWeight but this error term is not —
    // confirm whether the reported objective should also be class-weighted.
    double error = -(RNNUtils.elementwiseApplyLog(predictions).elementMult(goldLabel).elementSum());